In [1]:
import glob
import math
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import random
import sklearn.metrics as metrics

from tensorflow.keras import optimizers
from tensorflow.keras.callbacks import ModelCheckpoint, CSVLogger, LearningRateScheduler
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import add, concatenate, Conv2D, Dense, Dropout, Flatten, Input
from tensorflow.keras.layers import Activation, AveragePooling2D, BatchNormalization, MaxPooling2D
from tensorflow.keras.regularizers import l2
from tensorflow.keras.utils import to_categorical


%matplotlib inline
In [2]:
                            # Set up 'ggplot' style
plt.style.use('ggplot')     # if want to use the default style, set 'classic'
plt.rcParams['ytick.right']     = True
plt.rcParams['ytick.labelright']= True
plt.rcParams['ytick.left']      = False
plt.rcParams['ytick.labelleft'] = False
plt.rcParams['font.family']     = 'Arial'
In [3]:
# where am i?
%pwd
Out[3]:
'C:\\Users\\david\\Documents\\ImageNet'
In [4]:
# Collect the file paths for each image category.
flowers = glob.glob('./data/flr_*.jpg')
fungus = glob.glob('./data/fgs_*.jpg')
rocks = glob.glob('./data/rck_*.jpg')

# Pixelated images use a different filename prefix and extension.
pixel_flowers = glob.glob('./data/pxl_flower_*.jpeg')
pixel_umbrella = glob.glob('./data/pxl_umbrella_*.jpeg')

# Label every count explicitly: the first two numbers are both flower images
# (regular vs pixelated), which the old message conflated into one label.
print("There are %s flower, %s pixel-flower, %s fungus, %s rock and %s umbrella pictures"
      % (len(flowers), len(pixel_flowers), len(fungus), len(rocks), len(pixel_umbrella)))
There are 1269, 1792 flower, 856 fungus, 1007 rock and 420 umbrella pictures
In [5]:
# Display a handful of randomly chosen images from the selected dataset.
from IPython.display import Image

# Pick which category to preview: flowers, fungus or rocks.
dataset = flowers

for _ in range(5):
    index = random.randint(0, len(dataset)-1)
    print("Showing:", dataset[index])

    picture = mpimg.imread(dataset[index])
    plt.imshow(picture)
    plt.show()

#Image(dataset[index])
Showing: ./data\flr_00366.jpg
Showing: ./data\flr_00385.jpg
Showing: ./data\flr_00160.jpg
Showing: ./data\flr_00971.jpg
Showing: ./data\flr_00293.jpg

Extract the training and testing datasets

In [6]:
# Load the pre-split train/test images and labels from .npz archives.
def _load_npz(path):
    """Return the single array stored under key 'arr_0' in the given .npz file."""
    return np.load(path)['arr_0']

trDatOrg = _load_npz('flrnonflr-train-imgs96-0.8.npz')
trLblOrg = _load_npz('flrnonflr-train-labels96-0.8.npz')
tsDatOrg = _load_npz('flrnonflr-test-imgs96-0.8.npz')
tsLblOrg = _load_npz('flrnonflr-test-labels96-0.8.npz')
In [7]:
# Report the array shapes of the training/testing images and labels.
print("For the training and test datasets:")
print(f"The shapes are {trDatOrg.shape}, {trLblOrg.shape}, "
      f"{tsDatOrg.shape}, {tsLblOrg.shape}")
For the training and test datasets:
The shapes are (4264, 96, 96, 3), (4264,), (1067, 96, 96, 3), (1067,)
In [8]:
# Show 20 randomly chosen test-set images together with their labels.

data = tsDatOrg
label = tsLblOrg

for _ in range(20):
    index = random.randint(0, len(data)-1)
    print("Showing %s index image, It is %s" % (index, label[index]))
    plt.imshow(data[index])
    plt.show()
Showing 354 index image, It is 1.0
Showing 550 index image, It is 1.0
Showing 150 index image, It is 1.0
Showing 918 index image, It is 0.0
Showing 162 index image, It is 1.0
Showing 928 index image, It is 0.0
Showing 902 index image, It is 0.0
Showing 986 index image, It is 0.0
Showing 855 index image, It is 0.0
Showing 23 index image, It is 1.0
Showing 974 index image, It is 0.0
Showing 232 index image, It is 1.0
Showing 63 index image, It is 1.0
Showing 338 index image, It is 1.0
Showing 743 index image, It is 0.0
Showing 877 index image, It is 0.0
Showing 893 index image, It is 0.0
Showing 724 index image, It is 0.0
Showing 672 index image, It is 0.0
Showing 370 index image, It is 1.0
In [9]:
# Convert the images to 'float32' and rescale pixel values from 0~255 to 0~1.
trDat = trDatOrg.astype('float32')/255
tsDat = tsDatOrg.astype('float32')/255

# Record the per-image geometry; the data is already in the
# [samples][rows][cols][channel] layout Keras expects (see the printed
# shapes above), so no reshape is needed.
imgrows = trDat.shape[1]
imgclms = trDat.shape[2]
channel = 3

# One-hot encode the labels and derive the number of classes from the
# encoded label width.
trLbl = to_categorical(trLblOrg)
tsLbl = to_categorical(tsLblOrg)
num_classes = tsLbl.shape[1]
In [10]:
# Fix the NumPy random seed so the run is reproducible.
seed = 29
np.random.seed(seed)

# Name tag for this model (presumably used for checkpoint/log filenames
# further down — not shown in this cell).
modelname = 'FlowerPower'

def createBaselineModel():
    """Build and compile a small CNN baseline.

    Two conv/max-pool stages followed by dropout, a 32-unit dense layer
    and a softmax head sized by the global `num_classes`.

    Returns:
        A compiled Keras Model (categorical cross-entropy, Adam, accuracy).
    """
    inputs = Input(shape=(imgrows, imgclms, channel))

    net = Conv2D(30, (4, 4), activation='relu')(inputs)
    net = MaxPooling2D(pool_size=(2, 2))(net)
    net = Conv2D(50, (4, 4), activation='relu')(net)
    net = MaxPooling2D(pool_size=(2, 2))(net)
    net = Dropout(0.3)(net)
    net = Flatten()(net)
    net = Dense(32, activation='relu')(net)
    net = Dense(num_classes, activation='softmax')(net)

    model = Model(inputs=[inputs], outputs=net)
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    return model

def resLyr(inputs,
           numFilters=16,
           kernelSz=3,
           strides=1,
           activation='relu',
           batchNorm=True,
           convFirst=True,
           lyrName=None):
    """Single residual sub-layer: Conv2D with optional BatchNorm/Activation.

    When `convFirst` is True the order is conv -> BN -> activation;
    otherwise BN -> activation -> conv (pre-activation form). BN and
    activation are each skipped when disabled/None.

    Returns the output tensor.
    """
    def nm(suffix):
        # Build a layer name from the block prefix, or let Keras auto-name.
        return lyrName + '_' + suffix if lyrName else None

    convLyr = Conv2D(numFilters,
                     kernel_size=kernelSz,
                     strides=strides,
                     padding='same',
                     kernel_initializer='he_normal',
                     kernel_regularizer=l2(1e-4),
                     name=nm('conv'))

    x = inputs
    if convFirst:
        x = convLyr(x)
        if batchNorm:
            x = BatchNormalization(name=nm('bn'))(x)
        if activation is not None:
            x = Activation(activation, name=nm(activation))(x)
        return x

    # Pre-activation ordering: normalize/activate first, convolve last.
    if batchNorm:
        x = BatchNormalization(name=nm('bn'))(x)
    if activation is not None:
        x = Activation(activation, name=nm(activation))(x)
    return convLyr(x)


def resBlkV1(inputs,
             numFilters=16,
             numBlocks=3,
             downsampleOnFirst=True,
             names=None):
    """Stack `numBlocks` v1 residual blocks (two resLyr calls each).

    When `downsampleOnFirst` is True the first block halves the spatial
    size with stride 2 and projects the shortcut through a 1x1 linear
    convolution so the add() shapes match.

    Returns the output tensor.
    """
    x = inputs
    for blkIdx in range(numBlocks):
        firstDown = downsampleOnFirst and blkIdx == 0
        strides = 2 if firstDown else 1
        blkStr = str(blkIdx + 1)

        def nm(suffix):
            return names + '_Blk' + blkStr + '_' + suffix if names else None

        # Main path: conv-BN-relu then conv-BN (final activation deferred
        # until after the residual add).
        y = resLyr(inputs=x, numFilters=numFilters, strides=strides,
                   lyrName=nm('Res1'))
        y = resLyr(inputs=y, numFilters=numFilters, activation=None,
                   lyrName=nm('Res2'))

        # Shortcut path: 1x1 projection only when downsampling.
        if firstDown:
            x = resLyr(inputs=x, numFilters=numFilters, kernelSz=1,
                       strides=strides, activation=None, batchNorm=False,
                       lyrName=nm('lin'))

        x = add([x, y], name=nm('add'))
        x = Activation('relu', name=nm('relu'))(x)
    return x

# Optimizer shared by createResNetV1 below; the Adam alternative is kept
# commented out for reference.
#optmz = optimizers.Adam(lr=0.001)
# NOTE(review): `lr` is a deprecated alias for `learning_rate` in newer
# Keras releases — confirm the installed TF version still accepts it.
optmz = optimizers.RMSprop(lr=0.001)

def createResNetV1(inputShape=(imgrows, imgclms, channel),
                   numClasses=2):
    """Build and compile a ResNet-v1 style classifier.

    An initial conv layer is followed by six residual stages of three
    blocks each; stages 2, 3, 4 and 6 downsample by stride 2
    (96 -> 48 -> 24 -> 12 -> 6) and every stage is followed by dropout.
    A 6x6 average pool, flatten and softmax dense layer form the head.

    Returns:
        A compiled Keras Model (categorical cross-entropy, global `optmz`,
        accuracy metric).
    """
    inputs = Input(shape=inputShape)
    v = resLyr(inputs, lyrName='Inpt')

    # (stage name, filters, downsample on first block, dropout after stage)
    stages = [
        ('Stg1', 16,  False, 0.30),
        ('Stg2', 32,  True,  0.40),
        ('Stg3', 64,  True,  0.50),
        ('Stg4', 128, True,  0.50),
        ('Stg5', 128, False, 0.50),
        ('Stg6', 256, True,  0.50),
    ]
    for stageName, filters, downsample, dropRate in stages:
        v = resBlkV1(inputs=v, numFilters=filters, numBlocks=3,
                     downsampleOnFirst=downsample, names=stageName)
        v = Dropout(dropRate)(v)

    v = AveragePooling2D(pool_size=6, name='AvgPool')(v)
    v = Flatten()(v)
    outputs = Dense(numClasses, activation='softmax',
                    kernel_initializer='he_normal')(v)

    model = Model(inputs=inputs, outputs=outputs)
    model.compile(loss='categorical_crossentropy',
                  optimizer=optmz,
                  metrics=['accuracy'])
    return model



# Build two identical network instances:
#   model   - the network that will be trained
#   modelGo - a second copy used for the final testing pass
model = createResNetV1()
modelGo = createResNetV1()

model.summary()
WARNING:tensorflow:From D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\keras\initializers.py:104: calling VarianceScaling.__init__ (from tensorflow.python.ops.init_ops) with distribution=normal is deprecated and will be removed in a future version.
Instructions for updating:
`normal` is a deprecated alias for `truncated_normal`
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            (None, 96, 96, 3)    0                                            
__________________________________________________________________________________________________
Inpt_conv (Conv2D)              (None, 96, 96, 16)   448         input_1[0][0]                    
__________________________________________________________________________________________________
Inpt_bn (BatchNormalization)    (None, 96, 96, 16)   64          Inpt_conv[0][0]                  
__________________________________________________________________________________________________
Inpt_relu (Activation)          (None, 96, 96, 16)   0           Inpt_bn[0][0]                    
__________________________________________________________________________________________________
Stg1_Blk1_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Inpt_relu[0][0]                  
__________________________________________________________________________________________________
Stg1_Blk1_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk1_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_add (Add)             (None, 96, 96, 16)   0           Inpt_relu[0][0]                  
                                                                 Stg1_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk1_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg1_Blk2_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg1_Blk2_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk2_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_add (Add)             (None, 96, 96, 16)   0           Stg1_Blk1_relu[0][0]             
                                                                 Stg1_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk2_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg1_Blk3_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg1_Blk3_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk3_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_add (Add)             (None, 96, 96, 16)   0           Stg1_Blk2_relu[0][0]             
                                                                 Stg1_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk3_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout (Dropout)               (None, 96, 96, 16)   0           Stg1_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk1_Res1_conv (Conv2D)    (None, 48, 48, 32)   4640        dropout[0][0]                    
__________________________________________________________________________________________________
Stg2_Blk1_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk1_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_lin_conv (Conv2D)     (None, 48, 48, 32)   544         dropout[0][0]                    
__________________________________________________________________________________________________
Stg2_Blk1_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk1_lin_conv[0][0]         
                                                                 Stg2_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk1_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk2_Res1_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk2_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk2_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk1_relu[0][0]             
                                                                 Stg2_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk2_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk3_Res1_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk3_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk3_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk2_relu[0][0]             
                                                                 Stg2_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk3_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_1 (Dropout)             (None, 48, 48, 32)   0           Stg2_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk1_Res1_conv (Conv2D)    (None, 24, 24, 64)   18496       dropout_1[0][0]                  
__________________________________________________________________________________________________
Stg3_Blk1_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk1_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_lin_conv (Conv2D)     (None, 24, 24, 64)   2112        dropout_1[0][0]                  
__________________________________________________________________________________________________
Stg3_Blk1_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk1_lin_conv[0][0]         
                                                                 Stg3_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk1_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk2_Res1_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk2_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk2_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk1_relu[0][0]             
                                                                 Stg3_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk2_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk3_Res1_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk3_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk3_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk2_relu[0][0]             
                                                                 Stg3_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk3_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_2 (Dropout)             (None, 24, 24, 64)   0           Stg3_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk1_Res1_conv (Conv2D)    (None, 12, 12, 128)  73856       dropout_2[0][0]                  
__________________________________________________________________________________________________
Stg4_Blk1_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk1_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_lin_conv (Conv2D)     (None, 12, 12, 128)  8320        dropout_2[0][0]                  
__________________________________________________________________________________________________
Stg4_Blk1_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk1_lin_conv[0][0]         
                                                                 Stg4_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk1_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg4_Blk2_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk2_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk2_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk1_relu[0][0]             
                                                                 Stg4_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk2_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg4_Blk3_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk3_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk3_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk2_relu[0][0]             
                                                                 Stg4_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk3_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_3 (Dropout)             (None, 12, 12, 128)  0           Stg4_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk1_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      dropout_3[0][0]                  
__________________________________________________________________________________________________
Stg5_Blk1_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk1_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_add (Add)             (None, 12, 12, 128)  0           dropout_3[0][0]                  
                                                                 Stg5_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk1_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg5_Blk2_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk2_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk2_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_add (Add)             (None, 12, 12, 128)  0           Stg5_Blk1_relu[0][0]             
                                                                 Stg5_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk2_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg5_Blk3_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk3_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk3_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_add (Add)             (None, 12, 12, 128)  0           Stg5_Blk2_relu[0][0]             
                                                                 Stg5_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk3_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_4 (Dropout)             (None, 12, 12, 128)  0           Stg5_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk1_Res1_conv (Conv2D)    (None, 6, 6, 256)    295168      dropout_4[0][0]                  
__________________________________________________________________________________________________
Stg6_Blk1_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk1_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_lin_conv (Conv2D)     (None, 6, 6, 256)    33024       dropout_4[0][0]                  
__________________________________________________________________________________________________
Stg6_Blk1_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk1_lin_conv[0][0]         
                                                                 Stg6_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk1_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg6_Blk2_Res1_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk2_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk2_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk1_relu[0][0]             
                                                                 Stg6_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk2_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg6_Blk3_Res1_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk3_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk3_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk2_relu[0][0]             
                                                                 Stg6_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk3_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_5 (Dropout)             (None, 6, 6, 256)    0           Stg6_Blk3_relu[0][0]             
__________________________________________________________________________________________________
AvgPool (AveragePooling2D)      (None, 1, 1, 256)    0           dropout_5[0][0]                  
__________________________________________________________________________________________________
flatten (Flatten)               (None, 256)          0           AvgPool[0][0]                    
__________________________________________________________________________________________________
dense (Dense)                   (None, 2)            514         flatten[0][0]                    
==================================================================================================
Total params: 5,270,786
Trainable params: 5,263,266
Non-trainable params: 7,520
__________________________________________________________________________________________________
In [11]:
# Create checkpoint for the training
# This checkpoint performs model saving when
# an epoch gives highest testing accuracy
# filepath        = modelname + ".hdf5"
# checkpoint      = ModelCheckpoint(filepath, 
#                                   monitor='val_acc', 
#                                   verbose=0, 
#                                   save_best_only=True, 
#                                   mode='max')

#                             # Log the epoch detail into csv
# csv_logger      = CSVLogger(modelname +'.csv')
# callbacks_list  = [checkpoint,csv_logger]

def lrSchedule(epoch):
    """Piecewise-constant learning-rate schedule for LearningRateScheduler.

    Starts at 1e-3 and decays at fixed epoch milestones:
      epoch > 150 -> 1e-4, > 200 -> 1e-5, > 240 -> 1e-6, > 270 -> 5e-7.

    Parameters
    ----------
    epoch : int
        Zero-based epoch index supplied by Keras.

    Returns
    -------
    float
        Learning rate to use for this epoch.
    """
    base_lr = 1e-3
    # Check the largest milestone first; only the first match applies.
    decay_steps = ((270, 0.5e-3),
                   (240, 1e-3),
                   (200, 1e-2),
                   (150, 1e-1))

    lr = base_lr
    for milestone, factor in decay_steps:
        if epoch > milestone:
            lr = base_lr * factor
            break

    print('Learning rate: ', lr)

    return lr

# Wrap the schedule function so Keras applies it at the start of each epoch.
LRScheduler     = LearningRateScheduler(lrSchedule)

                            # Create checkpoint for the training
                            # This checkpoint performs model saving when
                            # an epoch gives highest testing accuracy
# NOTE(review): `modelname` is defined in an earlier cell — confirm it is set
# before this cell runs under Restart & Run All.
filepath        = modelname + ".hdf5"
# Save weights only when validation accuracy improves on the best seen so far.
# NOTE(review): 'val_acc' is the TF1-style metric name; newer TF2 versions
# report 'val_accuracy' — confirm against the training log (here it shows
# val_acc, so this matches the installed version).
checkpoint      = ModelCheckpoint(filepath, 
                                  monitor='val_acc', 
                                  verbose=0, 
                                  save_best_only=True, 
                                  mode='max')

                            # Log the epoch detail into csv
csv_logger      = CSVLogger(modelname +'.csv')
# Callbacks passed to fit: best-model checkpointing, per-epoch CSV logging,
# and the learning-rate schedule.
callbacks_list  = [checkpoint, csv_logger, LRScheduler]
#callbacks_list  = [checkpoint, csv_logger]
In [12]:
# Fit the model
# This is where the training starts
# model.fit(trDat, 
#           trLbl, 
#           validation_data=(tsDat, tsLbl), 
#           epochs=120, 
#           batch_size=32,
#           callbacks=callbacks_list)

# On-the-fly augmentation of the training images: random shifts, rotation,
# zoom and horizontal flips. Vertical flips are disabled (scene photos have
# a natural up direction).
datagen = ImageDataGenerator(width_shift_range=0.25,
                             height_shift_range=0.25,
                             rotation_range=45,
                             # NOTE(review): zoom_range=0.8 samples zoom in
                             # [0.2, 1.8] — quite aggressive; confirm intended.
                             zoom_range=0.8,
                             #zca_epsilon=1e-6,
                             #zca_whitening=True,
                             fill_mode='nearest',
                             horizontal_flip=True,
                             vertical_flip=False)

# Train on augmented batches of 16; validate on the untouched test set.
# Fixes vs. original:
#  - steps_per_epoch must be an integer; len(trDat)/16 is a float, which
#    produced the odd "267/266" progress display in the training log.
#    math.ceil covers the final partial batch (math is imported at the top).
#  - Model.fit accepts generators in TF2 (the file already imports from
#    tensorflow.keras); fit_generator is deprecated.
model.fit(datagen.flow(trDat, trLbl, batch_size=16),
          validation_data=(tsDat, tsLbl),
          epochs=300,
          verbose=1,
          steps_per_epoch=math.ceil(len(trDat) / 16),
          callbacks=callbacks_list)
Learning rate:  0.001
Epoch 1/300
267/266 [==============================] - 98s 366ms/step - loss: 2.0496 - acc: 0.6262 - val_loss: 1.6170 - val_acc: 0.5586
Learning rate:  0.001
Epoch 2/300
267/266 [==============================] - 56s 210ms/step - loss: 1.1585 - acc: 0.7065 - val_loss: 1.0077 - val_acc: 0.6317
Learning rate:  0.001
Epoch 3/300
267/266 [==============================] - 56s 209ms/step - loss: 0.8894 - acc: 0.7502 - val_loss: 0.8042 - val_acc: 0.7038
Learning rate:  0.001
Epoch 4/300
267/266 [==============================] - 58s 217ms/step - loss: 0.8326 - acc: 0.7509 - val_loss: 0.7050 - val_acc: 0.8135
Learning rate:  0.001
Epoch 5/300
267/266 [==============================] - 56s 209ms/step - loss: 0.7402 - acc: 0.7734 - val_loss: 0.7086 - val_acc: 0.7423
Learning rate:  0.001
Epoch 6/300
267/266 [==============================] - 57s 214ms/step - loss: 0.6897 - acc: 0.7657 - val_loss: 0.6775 - val_acc: 0.7245
Learning rate:  0.001
Epoch 7/300
267/266 [==============================] - 55s 208ms/step - loss: 0.6532 - acc: 0.7821 - val_loss: 0.6583 - val_acc: 0.7573
Learning rate:  0.001
Epoch 8/300
267/266 [==============================] - 55s 208ms/step - loss: 0.6111 - acc: 0.7853 - val_loss: 0.5852 - val_acc: 0.7685
Learning rate:  0.001
Epoch 9/300
267/266 [==============================] - 55s 208ms/step - loss: 0.5770 - acc: 0.7938 - val_loss: 0.6443 - val_acc: 0.7160
Learning rate:  0.001
Epoch 10/300
267/266 [==============================] - 86s 320ms/step - loss: 0.5539 - acc: 0.8095 - val_loss: 0.5792 - val_acc: 0.7526
Learning rate:  0.001
Epoch 11/300
267/266 [==============================] - 41s 155ms/step - loss: 0.5303 - acc: 0.8209 - val_loss: 0.4664 - val_acc: 0.8547
Learning rate:  0.001
Epoch 12/300
267/266 [==============================] - 42s 157ms/step - loss: 0.5319 - acc: 0.8099 - val_loss: 0.4473 - val_acc: 0.8772
Learning rate:  0.001
Epoch 13/300
267/266 [==============================] - 41s 154ms/step - loss: 0.5125 - acc: 0.8207 - val_loss: 0.4597 - val_acc: 0.8566
Learning rate:  0.001
Epoch 14/300
267/266 [==============================] - 41s 153ms/step - loss: 0.5050 - acc: 0.8219 - val_loss: 0.4211 - val_acc: 0.8697
Learning rate:  0.001
Epoch 15/300
267/266 [==============================] - 41s 153ms/step - loss: 0.4986 - acc: 0.8209 - val_loss: 0.4085 - val_acc: 0.8725
Learning rate:  0.001
Epoch 16/300
267/266 [==============================] - 41s 153ms/step - loss: 0.4891 - acc: 0.8282 - val_loss: 0.5489 - val_acc: 0.7601
Learning rate:  0.001
Epoch 17/300
267/266 [==============================] - 41s 154ms/step - loss: 0.4809 - acc: 0.8242 - val_loss: 0.4046 - val_acc: 0.8894
Learning rate:  0.001
Epoch 18/300
267/266 [==============================] - 42s 156ms/step - loss: 0.4679 - acc: 0.8294 - val_loss: 0.4575 - val_acc: 0.8397
Learning rate:  0.001
Epoch 19/300
267/266 [==============================] - 41s 154ms/step - loss: 0.4626 - acc: 0.8326 - val_loss: 0.3654 - val_acc: 0.8941
Learning rate:  0.001
Epoch 20/300
267/266 [==============================] - 43s 161ms/step - loss: 0.4590 - acc: 0.8392 - val_loss: 0.4289 - val_acc: 0.8575
Learning rate:  0.001
Epoch 21/300
267/266 [==============================] - 42s 156ms/step - loss: 0.4568 - acc: 0.8347 - val_loss: 0.4170 - val_acc: 0.8566
Learning rate:  0.001
Epoch 22/300
267/266 [==============================] - 41s 154ms/step - loss: 0.4569 - acc: 0.8298 - val_loss: 0.4258 - val_acc: 0.8679
Learning rate:  0.001
Epoch 23/300
267/266 [==============================] - 42s 156ms/step - loss: 0.4454 - acc: 0.8368 - val_loss: 0.4063 - val_acc: 0.8660
Learning rate:  0.001
Epoch 24/300
267/266 [==============================] - 41s 154ms/step - loss: 0.4501 - acc: 0.8305 - val_loss: 0.4108 - val_acc: 0.8819
Learning rate:  0.001
Epoch 25/300
267/266 [==============================] - 42s 157ms/step - loss: 0.4415 - acc: 0.8415 - val_loss: 0.4105 - val_acc: 0.8650
Learning rate:  0.001
Epoch 26/300
267/266 [==============================] - 42s 156ms/step - loss: 0.4367 - acc: 0.8401 - val_loss: 0.3830 - val_acc: 0.8913
Learning rate:  0.001
Epoch 27/300
267/266 [==============================] - 42s 157ms/step - loss: 0.4290 - acc: 0.8375 - val_loss: 0.4136 - val_acc: 0.8463
Learning rate:  0.001
Epoch 28/300
267/266 [==============================] - 42s 158ms/step - loss: 0.4241 - acc: 0.8436 - val_loss: 0.3452 - val_acc: 0.8903
Learning rate:  0.001
Epoch 29/300
267/266 [==============================] - 42s 157ms/step - loss: 0.4268 - acc: 0.8387 - val_loss: 0.3277 - val_acc: 0.8894
Learning rate:  0.001
Epoch 30/300
267/266 [==============================] - 43s 163ms/step - loss: 0.4196 - acc: 0.8467 - val_loss: 0.3820 - val_acc: 0.8875
Learning rate:  0.001
Epoch 31/300
267/266 [==============================] - 42s 156ms/step - loss: 0.4154 - acc: 0.8478 - val_loss: 0.3914 - val_acc: 0.8735
Learning rate:  0.001
Epoch 32/300
267/266 [==============================] - 42s 157ms/step - loss: 0.4280 - acc: 0.8399 - val_loss: 0.3952 - val_acc: 0.8978
Learning rate:  0.001
Epoch 33/300
267/266 [==============================] - 41s 154ms/step - loss: 0.4130 - acc: 0.8485 - val_loss: 0.3462 - val_acc: 0.8875
Learning rate:  0.001
Epoch 34/300
267/266 [==============================] - 42s 158ms/step - loss: 0.4121 - acc: 0.8500 - val_loss: 0.4305 - val_acc: 0.8454
Learning rate:  0.001
Epoch 35/300
267/266 [==============================] - 42s 159ms/step - loss: 0.4127 - acc: 0.8448 - val_loss: 0.3482 - val_acc: 0.8932
Learning rate:  0.001
Epoch 36/300
267/266 [==============================] - 41s 154ms/step - loss: 0.4046 - acc: 0.8469 - val_loss: 0.4845 - val_acc: 0.8500
Learning rate:  0.001
Epoch 37/300
267/266 [==============================] - 42s 159ms/step - loss: 0.4003 - acc: 0.8518 - val_loss: 0.3853 - val_acc: 0.8875
Learning rate:  0.001
Epoch 38/300
267/266 [==============================] - 41s 155ms/step - loss: 0.4043 - acc: 0.8485 - val_loss: 0.3754 - val_acc: 0.8997
Learning rate:  0.001
Epoch 39/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3971 - acc: 0.8546 - val_loss: 0.4336 - val_acc: 0.8397
Learning rate:  0.001
Epoch 40/300
267/266 [==============================] - 41s 154ms/step - loss: 0.4035 - acc: 0.8478 - val_loss: 0.3944 - val_acc: 0.8632
Learning rate:  0.001
Epoch 41/300
267/266 [==============================] - 42s 156ms/step - loss: 0.4015 - acc: 0.8584 - val_loss: 0.3440 - val_acc: 0.8922
Learning rate:  0.001
Epoch 42/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3973 - acc: 0.8574 - val_loss: 0.3745 - val_acc: 0.8754
Learning rate:  0.001
Epoch 43/300
267/266 [==============================] - 42s 158ms/step - loss: 0.3910 - acc: 0.8549 - val_loss: 0.3557 - val_acc: 0.8885
Learning rate:  0.001
Epoch 44/300
267/266 [==============================] - 43s 160ms/step - loss: 0.3974 - acc: 0.8558 - val_loss: 0.3053 - val_acc: 0.8960
Learning rate:  0.001
Epoch 45/300
267/266 [==============================] - 43s 160ms/step - loss: 0.3952 - acc: 0.8485 - val_loss: 0.4091 - val_acc: 0.8491
Learning rate:  0.001
Epoch 46/300
267/266 [==============================] - 41s 155ms/step - loss: 0.3991 - acc: 0.8521 - val_loss: 0.3949 - val_acc: 0.8782
Learning rate:  0.001
Epoch 47/300
267/266 [==============================] - 43s 160ms/step - loss: 0.3950 - acc: 0.8537 - val_loss: 0.3202 - val_acc: 0.8894
Learning rate:  0.001
Epoch 48/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3875 - acc: 0.8626 - val_loss: 0.3500 - val_acc: 0.8782
Learning rate:  0.001
Epoch 49/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3806 - acc: 0.8586 - val_loss: 0.3356 - val_acc: 0.8894
Learning rate:  0.001
Epoch 50/300
267/266 [==============================] - 42s 159ms/step - loss: 0.3679 - acc: 0.8666 - val_loss: 0.4536 - val_acc: 0.8838
Learning rate:  0.001
Epoch 51/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3836 - acc: 0.8567 - val_loss: 0.3149 - val_acc: 0.8978
Learning rate:  0.001
Epoch 52/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3841 - acc: 0.8579 - val_loss: 0.3958 - val_acc: 0.8435
Learning rate:  0.001
Epoch 53/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3755 - acc: 0.8659 - val_loss: 0.4136 - val_acc: 0.8519
Learning rate:  0.001
Epoch 54/300
267/266 [==============================] - 42s 158ms/step - loss: 0.3898 - acc: 0.8596 - val_loss: 0.2912 - val_acc: 0.9072
Learning rate:  0.001
Epoch 55/300
267/266 [==============================] - 41s 155ms/step - loss: 0.3698 - acc: 0.8663 - val_loss: 0.2823 - val_acc: 0.9100
Learning rate:  0.001
Epoch 56/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3700 - acc: 0.8621 - val_loss: 0.3931 - val_acc: 0.8538
Learning rate:  0.001
Epoch 57/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3827 - acc: 0.8579 - val_loss: 0.5438 - val_acc: 0.8041
Learning rate:  0.001
Epoch 58/300
267/266 [==============================] - 41s 155ms/step - loss: 0.3717 - acc: 0.8720 - val_loss: 0.3104 - val_acc: 0.9016
Learning rate:  0.001
Epoch 59/300
267/266 [==============================] - 43s 161ms/step - loss: 0.3662 - acc: 0.8743 - val_loss: 0.3699 - val_acc: 0.8875
Learning rate:  0.001
Epoch 60/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3782 - acc: 0.8656 - val_loss: 0.3749 - val_acc: 0.8641
Learning rate:  0.001
Epoch 61/300
267/266 [==============================] - 41s 155ms/step - loss: 0.3714 - acc: 0.8610 - val_loss: 0.3334 - val_acc: 0.8997
Learning rate:  0.001
Epoch 62/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3693 - acc: 0.8691 - val_loss: 0.5185 - val_acc: 0.8435
Learning rate:  0.001
Epoch 63/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3694 - acc: 0.8701 - val_loss: 0.3154 - val_acc: 0.8978
Learning rate:  0.001
Epoch 64/300
267/266 [==============================] - 41s 155ms/step - loss: 0.3676 - acc: 0.8567 - val_loss: 0.2977 - val_acc: 0.9025
Learning rate:  0.001
Epoch 65/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3724 - acc: 0.8607 - val_loss: 0.5592 - val_acc: 0.7891
Learning rate:  0.001
Epoch 66/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3631 - acc: 0.8703 - val_loss: 0.3140 - val_acc: 0.8847
Learning rate:  0.001
Epoch 67/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3725 - acc: 0.8595 - val_loss: 0.3377 - val_acc: 0.8941
Learning rate:  0.001
Epoch 68/300
267/266 [==============================] - 41s 155ms/step - loss: 0.3639 - acc: 0.8706 - val_loss: 0.3065 - val_acc: 0.8932
Learning rate:  0.001
Epoch 69/300
267/266 [==============================] - 41s 155ms/step - loss: 0.3650 - acc: 0.8670 - val_loss: 0.3815 - val_acc: 0.8744
Learning rate:  0.001
Epoch 70/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3664 - acc: 0.8694 - val_loss: 0.3117 - val_acc: 0.9063
Learning rate:  0.001
Epoch 71/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3548 - acc: 0.8750 - val_loss: 0.3767 - val_acc: 0.8744
Learning rate:  0.001
Epoch 72/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3659 - acc: 0.8663 - val_loss: 0.3886 - val_acc: 0.8585
Learning rate:  0.001
Epoch 73/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3728 - acc: 0.8588 - val_loss: 0.3070 - val_acc: 0.8894
Learning rate:  0.001
Epoch 74/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3596 - acc: 0.8729 - val_loss: 0.3877 - val_acc: 0.8969
Learning rate:  0.001
Epoch 75/300
267/266 [==============================] - 42s 159ms/step - loss: 0.3681 - acc: 0.8661 - val_loss: 0.3774 - val_acc: 0.8791
Learning rate:  0.001
Epoch 76/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3543 - acc: 0.8708 - val_loss: 0.2985 - val_acc: 0.9082
Learning rate:  0.001
Epoch 77/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3688 - acc: 0.8614 - val_loss: 0.3176 - val_acc: 0.9025
Learning rate:  0.001
Epoch 78/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3603 - acc: 0.8724 - val_loss: 0.3769 - val_acc: 0.8679
Learning rate:  0.001
Epoch 79/300
267/266 [==============================] - 43s 159ms/step - loss: 0.3629 - acc: 0.8619 - val_loss: 0.2838 - val_acc: 0.9053
Learning rate:  0.001
Epoch 80/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3582 - acc: 0.8743 - val_loss: 0.2940 - val_acc: 0.9072
Learning rate:  0.001
Epoch 81/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3549 - acc: 0.8790 - val_loss: 0.3435 - val_acc: 0.8978
Learning rate:  0.001
Epoch 82/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3668 - acc: 0.8645 - val_loss: 0.5772 - val_acc: 0.7704
Learning rate:  0.001
Epoch 83/300
267/266 [==============================] - 42s 158ms/step - loss: 0.3596 - acc: 0.8708 - val_loss: 0.3052 - val_acc: 0.8978
Learning rate:  0.001
Epoch 84/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3644 - acc: 0.8607 - val_loss: 0.3053 - val_acc: 0.9072
Learning rate:  0.001
Epoch 85/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3631 - acc: 0.8731 - val_loss: 0.2862 - val_acc: 0.9063
Learning rate:  0.001
Epoch 86/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3643 - acc: 0.8745 - val_loss: 0.3175 - val_acc: 0.9007
Learning rate:  0.001
Epoch 87/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3522 - acc: 0.8715 - val_loss: 0.4024 - val_acc: 0.8688
Learning rate:  0.001
Epoch 88/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3477 - acc: 0.8794 - val_loss: 0.3609 - val_acc: 0.8894
Learning rate:  0.001
Epoch 89/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3575 - acc: 0.8738 - val_loss: 0.3560 - val_acc: 0.8941
Learning rate:  0.001
Epoch 90/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3450 - acc: 0.8736 - val_loss: 0.3194 - val_acc: 0.8997
Learning rate:  0.001
Epoch 91/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3596 - acc: 0.8720 - val_loss: 0.3311 - val_acc: 0.8894
Learning rate:  0.001
Epoch 92/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3550 - acc: 0.8731 - val_loss: 0.5359 - val_acc: 0.8463
Learning rate:  0.001
Epoch 93/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3499 - acc: 0.8666 - val_loss: 0.3001 - val_acc: 0.8969
Learning rate:  0.001
Epoch 94/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3577 - acc: 0.8778 - val_loss: 0.3842 - val_acc: 0.8557
Learning rate:  0.001
Epoch 95/300
267/266 [==============================] - 42s 158ms/step - loss: 0.3404 - acc: 0.8757 - val_loss: 0.4454 - val_acc: 0.8950
Learning rate:  0.001
Epoch 96/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3708 - acc: 0.8652 - val_loss: 0.3063 - val_acc: 0.9072
Learning rate:  0.001
Epoch 97/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3487 - acc: 0.8816 - val_loss: 0.3210 - val_acc: 0.8997
Learning rate:  0.001
Epoch 98/300
267/266 [==============================] - 43s 161ms/step - loss: 0.3548 - acc: 0.8755 - val_loss: 0.2994 - val_acc: 0.9035
Learning rate:  0.001
Epoch 99/300
267/266 [==============================] - 43s 159ms/step - loss: 0.3600 - acc: 0.8617 - val_loss: 0.2793 - val_acc: 0.9100
Learning rate:  0.001
Epoch 100/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3496 - acc: 0.8766 - val_loss: 0.3179 - val_acc: 0.8913
Learning rate:  0.001
Epoch 101/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3416 - acc: 0.8832 - val_loss: 0.5108 - val_acc: 0.8632
Learning rate:  0.001
Epoch 102/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3495 - acc: 0.8715 - val_loss: 0.4145 - val_acc: 0.8847
Learning rate:  0.001
Epoch 103/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3524 - acc: 0.8769 - val_loss: 0.2953 - val_acc: 0.8978
Learning rate:  0.001
Epoch 104/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3430 - acc: 0.8799 - val_loss: 0.3137 - val_acc: 0.9119
Learning rate:  0.001
Epoch 105/300
267/266 [==============================] - 43s 163ms/step - loss: 0.3423 - acc: 0.8811 - val_loss: 0.3795 - val_acc: 0.8716
Learning rate:  0.001
Epoch 106/300
267/266 [==============================] - 41s 155ms/step - loss: 0.3583 - acc: 0.8708 - val_loss: 0.3221 - val_acc: 0.8922
Learning rate:  0.001
Epoch 107/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3480 - acc: 0.8722 - val_loss: 0.3328 - val_acc: 0.9035
Learning rate:  0.001
Epoch 108/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3471 - acc: 0.8762 - val_loss: 0.4301 - val_acc: 0.8210
Learning rate:  0.001
Epoch 109/300
267/266 [==============================] - 43s 160ms/step - loss: 0.3550 - acc: 0.8741 - val_loss: 0.3063 - val_acc: 0.8941
Learning rate:  0.001
Epoch 110/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3462 - acc: 0.8783 - val_loss: 0.4097 - val_acc: 0.9007
Learning rate:  0.001
Epoch 111/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3425 - acc: 0.8771 - val_loss: 0.3120 - val_acc: 0.8978
Learning rate:  0.001
Epoch 112/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3452 - acc: 0.8743 - val_loss: 0.5248 - val_acc: 0.8472
Learning rate:  0.001
Epoch 113/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3393 - acc: 0.8839 - val_loss: 0.3018 - val_acc: 0.9053
Learning rate:  0.001
Epoch 114/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3308 - acc: 0.8846 - val_loss: 0.4335 - val_acc: 0.8800
Learning rate:  0.001
Epoch 115/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3531 - acc: 0.8696 - val_loss: 0.2685 - val_acc: 0.9091
Learning rate:  0.001
Epoch 116/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3414 - acc: 0.8802 - val_loss: 0.2929 - val_acc: 0.9100
Learning rate:  0.001
Epoch 117/300
267/266 [==============================] - 42s 158ms/step - loss: 0.3382 - acc: 0.8797 - val_loss: 0.3884 - val_acc: 0.8613
Learning rate:  0.001
Epoch 118/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3385 - acc: 0.8820 - val_loss: 0.3474 - val_acc: 0.8969
Learning rate:  0.001
Epoch 119/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3379 - acc: 0.8790 - val_loss: 0.6087 - val_acc: 0.7966
Learning rate:  0.001
Epoch 120/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3486 - acc: 0.8773 - val_loss: 0.3892 - val_acc: 0.8838
Learning rate:  0.001
Epoch 121/300
267/266 [==============================] - 42s 158ms/step - loss: 0.3284 - acc: 0.8869 - val_loss: 0.3038 - val_acc: 0.8997
Learning rate:  0.001
Epoch 122/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3249 - acc: 0.8881 - val_loss: 0.2910 - val_acc: 0.9025
Learning rate:  0.001
Epoch 123/300
267/266 [==============================] - 42s 155ms/step - loss: 0.3418 - acc: 0.8764 - val_loss: 0.3962 - val_acc: 0.8697
Learning rate:  0.001
Epoch 124/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3376 - acc: 0.8797 - val_loss: 0.3092 - val_acc: 0.9016
Learning rate:  0.001
Epoch 125/300
267/266 [==============================] - 43s 160ms/step - loss: 0.3461 - acc: 0.8713 - val_loss: 0.2956 - val_acc: 0.8969
Learning rate:  0.001
Epoch 126/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3245 - acc: 0.8862 - val_loss: 0.4312 - val_acc: 0.8538
Learning rate:  0.001
Epoch 127/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3386 - acc: 0.8823 - val_loss: 0.3301 - val_acc: 0.8810
Learning rate:  0.001
Epoch 128/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3303 - acc: 0.8853 - val_loss: 0.5613 - val_acc: 0.8182
Learning rate:  0.001
Epoch 129/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3441 - acc: 0.8790 - val_loss: 0.4035 - val_acc: 0.8622
Learning rate:  0.001
Epoch 130/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3410 - acc: 0.8825 - val_loss: 0.5264 - val_acc: 0.8088
Learning rate:  0.001
Epoch 131/300
267/266 [==============================] - 42s 159ms/step - loss: 0.3416 - acc: 0.8785 - val_loss: 0.3612 - val_acc: 0.9035
Learning rate:  0.001
Epoch 132/300
267/266 [==============================] - 43s 160ms/step - loss: 0.3403 - acc: 0.8792 - val_loss: 0.2865 - val_acc: 0.9147
Learning rate:  0.001
Epoch 133/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3354 - acc: 0.8846 - val_loss: 0.2812 - val_acc: 0.9072
Learning rate:  0.001
Epoch 134/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3274 - acc: 0.8879 - val_loss: 0.3376 - val_acc: 0.8894
Learning rate:  0.001
Epoch 135/300
267/266 [==============================] - 42s 155ms/step - loss: 0.3417 - acc: 0.8846 - val_loss: 0.4318 - val_acc: 0.8322
Learning rate:  0.001
Epoch 136/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3264 - acc: 0.8811 - val_loss: 0.3852 - val_acc: 0.8650
Learning rate:  0.001
Epoch 137/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3221 - acc: 0.8851 - val_loss: 0.4538 - val_acc: 0.8960
Learning rate:  0.001
Epoch 138/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3251 - acc: 0.8825 - val_loss: 0.2898 - val_acc: 0.9128
Learning rate:  0.001
Epoch 139/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3361 - acc: 0.8879 - val_loss: 0.2930 - val_acc: 0.9138
Learning rate:  0.001
Epoch 140/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3345 - acc: 0.8830 - val_loss: 0.3511 - val_acc: 0.8997
Learning rate:  0.001
Epoch 141/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3377 - acc: 0.8846 - val_loss: 0.3312 - val_acc: 0.8838
Learning rate:  0.001
Epoch 142/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3329 - acc: 0.8783 - val_loss: 0.3381 - val_acc: 0.8997
Learning rate:  0.001
Epoch 143/300
267/266 [==============================] - 42s 157ms/step - loss: 0.3325 - acc: 0.8883 - val_loss: 0.3142 - val_acc: 0.9016
Learning rate:  0.001
Epoch 144/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3274 - acc: 0.8799 - val_loss: 0.3244 - val_acc: 0.8838
Learning rate:  0.001
Epoch 145/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3278 - acc: 0.8832 - val_loss: 0.4128 - val_acc: 0.8885
Learning rate:  0.001
Epoch 146/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3266 - acc: 0.8851 - val_loss: 0.3713 - val_acc: 0.8735
Learning rate:  0.001
Epoch 147/300
267/266 [==============================] - 41s 154ms/step - loss: 0.3292 - acc: 0.8844 - val_loss: 0.3479 - val_acc: 0.8950
Learning rate:  0.001
Epoch 148/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3432 - acc: 0.8792 - val_loss: 0.3563 - val_acc: 0.9016
Learning rate:  0.001
Epoch 149/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3360 - acc: 0.8771 - val_loss: 0.3131 - val_acc: 0.9053
Learning rate:  0.001
Epoch 150/300
267/266 [==============================] - 42s 159ms/step - loss: 0.3306 - acc: 0.8827 - val_loss: 1.4000 - val_acc: 0.6626
Learning rate:  0.001
Epoch 151/300
267/266 [==============================] - 42s 156ms/step - loss: 0.3352 - acc: 0.8858 - val_loss: 0.4423 - val_acc: 0.8632
Learning rate:  0.0001
Epoch 152/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2999 - acc: 0.8949 - val_loss: 0.2831 - val_acc: 0.9203
Learning rate:  0.0001
Epoch 153/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2940 - acc: 0.8956 - val_loss: 0.2697 - val_acc: 0.9147
Learning rate:  0.0001
Epoch 154/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2861 - acc: 0.8996 - val_loss: 0.2645 - val_acc: 0.9175
Learning rate:  0.0001
Epoch 155/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2868 - acc: 0.9052 - val_loss: 0.2610 - val_acc: 0.9128
Learning rate:  0.0001
Epoch 156/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2812 - acc: 0.9038 - val_loss: 0.2695 - val_acc: 0.9138
Learning rate:  0.0001
Epoch 157/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2773 - acc: 0.9047 - val_loss: 0.2632 - val_acc: 0.9166
Learning rate:  0.0001
Epoch 158/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2636 - acc: 0.9132 - val_loss: 0.2701 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 159/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2812 - acc: 0.9099 - val_loss: 0.2631 - val_acc: 0.9128
Learning rate:  0.0001
Epoch 160/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2744 - acc: 0.9089 - val_loss: 0.2656 - val_acc: 0.9147
Learning rate:  0.0001
Epoch 161/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2785 - acc: 0.9038 - val_loss: 0.2715 - val_acc: 0.9128
Learning rate:  0.0001
Epoch 162/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2732 - acc: 0.9066 - val_loss: 0.2626 - val_acc: 0.9147
Learning rate:  0.0001
Epoch 163/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2769 - acc: 0.9045 - val_loss: 0.2708 - val_acc: 0.9147
Learning rate:  0.0001
Epoch 164/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2645 - acc: 0.9141 - val_loss: 0.2684 - val_acc: 0.9175
Learning rate:  0.0001
Epoch 165/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2862 - acc: 0.9043 - val_loss: 0.2655 - val_acc: 0.9138
Learning rate:  0.0001
Epoch 166/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2731 - acc: 0.9103 - val_loss: 0.2635 - val_acc: 0.9147
Learning rate:  0.0001
Epoch 167/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2713 - acc: 0.9115 - val_loss: 0.2802 - val_acc: 0.9138
Learning rate:  0.0001
Epoch 168/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2656 - acc: 0.9108 - val_loss: 0.2720 - val_acc: 0.9157
Learning rate:  0.0001
Epoch 169/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2777 - acc: 0.9038 - val_loss: 0.2650 - val_acc: 0.9175
Learning rate:  0.0001
Epoch 170/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2684 - acc: 0.9111 - val_loss: 0.2572 - val_acc: 0.9157
Learning rate:  0.0001
Epoch 171/300
267/266 [==============================] - 42s 158ms/step - loss: 0.2707 - acc: 0.9099 - val_loss: 0.2692 - val_acc: 0.9138
Learning rate:  0.0001
Epoch 172/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2629 - acc: 0.9169 - val_loss: 0.2760 - val_acc: 0.9119
Learning rate:  0.0001
Epoch 173/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2701 - acc: 0.9080 - val_loss: 0.2767 - val_acc: 0.9157
Learning rate:  0.0001
Epoch 174/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2660 - acc: 0.9075 - val_loss: 0.2611 - val_acc: 0.9166
Learning rate:  0.0001
Epoch 175/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2704 - acc: 0.9085 - val_loss: 0.2670 - val_acc: 0.9157
Learning rate:  0.0001
Epoch 176/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2804 - acc: 0.9054 - val_loss: 0.2569 - val_acc: 0.9166
Learning rate:  0.0001
Epoch 177/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2699 - acc: 0.9110 - val_loss: 0.2544 - val_acc: 0.9166
Learning rate:  0.0001
Epoch 178/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2612 - acc: 0.9115 - val_loss: 0.2681 - val_acc: 0.9157
Learning rate:  0.0001
Epoch 179/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2689 - acc: 0.9099 - val_loss: 0.2539 - val_acc: 0.9175
Learning rate:  0.0001
Epoch 180/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2684 - acc: 0.9115 - val_loss: 0.2714 - val_acc: 0.9147
Learning rate:  0.0001
Epoch 181/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2653 - acc: 0.9113 - val_loss: 0.2640 - val_acc: 0.9185
Learning rate:  0.0001
Epoch 182/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2524 - acc: 0.9127 - val_loss: 0.2642 - val_acc: 0.9166
Learning rate:  0.0001
Epoch 183/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2698 - acc: 0.9071 - val_loss: 0.2658 - val_acc: 0.9175
Learning rate:  0.0001
Epoch 184/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2632 - acc: 0.9134 - val_loss: 0.2578 - val_acc: 0.9185
Learning rate:  0.0001
Epoch 185/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2591 - acc: 0.9122 - val_loss: 0.2643 - val_acc: 0.9128
Learning rate:  0.0001
Epoch 186/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2664 - acc: 0.9087 - val_loss: 0.2583 - val_acc: 0.9128
Learning rate:  0.0001
Epoch 187/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2634 - acc: 0.9139 - val_loss: 0.2565 - val_acc: 0.9138
Learning rate:  0.0001
Epoch 188/300
267/266 [==============================] - 43s 160ms/step - loss: 0.2615 - acc: 0.9120 - val_loss: 0.2547 - val_acc: 0.9175
Learning rate:  0.0001
Epoch 189/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2674 - acc: 0.9080 - val_loss: 0.2577 - val_acc: 0.9166
Learning rate:  0.0001
Epoch 190/300
267/266 [==============================] - 41s 155ms/step - loss: 0.2688 - acc: 0.9082 - val_loss: 0.2534 - val_acc: 0.9222
Learning rate:  0.0001
Epoch 191/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2564 - acc: 0.9185 - val_loss: 0.2681 - val_acc: 0.9166
Learning rate:  0.0001
Epoch 192/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2599 - acc: 0.9129 - val_loss: 0.2673 - val_acc: 0.9185
Learning rate:  0.0001
Epoch 193/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2662 - acc: 0.9078 - val_loss: 0.2725 - val_acc: 0.9157
Learning rate:  0.0001
Epoch 194/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2648 - acc: 0.9085 - val_loss: 0.2642 - val_acc: 0.9213
Learning rate:  0.0001
Epoch 195/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2770 - acc: 0.9057 - val_loss: 0.2603 - val_acc: 0.9147
Learning rate:  0.0001
Epoch 196/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2574 - acc: 0.9146 - val_loss: 0.2617 - val_acc: 0.9166
Learning rate:  0.0001
Epoch 197/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2607 - acc: 0.9136 - val_loss: 0.2670 - val_acc: 0.9166
Learning rate:  0.0001
Epoch 198/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2573 - acc: 0.9106 - val_loss: 0.2662 - val_acc: 0.9194
Learning rate:  0.0001
Epoch 199/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2545 - acc: 0.9132 - val_loss: 0.2636 - val_acc: 0.9185
Learning rate:  0.0001
Epoch 200/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2628 - acc: 0.9143 - val_loss: 0.2600 - val_acc: 0.9213
Learning rate:  0.0001
Epoch 201/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2666 - acc: 0.9113 - val_loss: 0.2637 - val_acc: 0.9203
Learning rate:  1e-05
Epoch 202/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2543 - acc: 0.9129 - val_loss: 0.2570 - val_acc: 0.9213
Learning rate:  1e-05
Epoch 203/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2515 - acc: 0.9157 - val_loss: 0.2568 - val_acc: 0.9222
Learning rate:  1e-05
Epoch 204/300
267/266 [==============================] - 42s 158ms/step - loss: 0.2477 - acc: 0.9176 - val_loss: 0.2604 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 205/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2597 - acc: 0.9157 - val_loss: 0.2597 - val_acc: 0.9203
Learning rate:  1e-05
Epoch 206/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2525 - acc: 0.9185 - val_loss: 0.2587 - val_acc: 0.9203
Learning rate:  1e-05
Epoch 207/300
267/266 [==============================] - 42s 158ms/step - loss: 0.2433 - acc: 0.9218 - val_loss: 0.2602 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 208/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2575 - acc: 0.9146 - val_loss: 0.2584 - val_acc: 0.9222
Learning rate:  1e-05
Epoch 209/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2602 - acc: 0.9099 - val_loss: 0.2590 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 210/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2496 - acc: 0.9221 - val_loss: 0.2567 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 211/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2515 - acc: 0.9174 - val_loss: 0.2557 - val_acc: 0.9185
Learning rate:  1e-05
Epoch 212/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2547 - acc: 0.9174 - val_loss: 0.2582 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 213/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2489 - acc: 0.9155 - val_loss: 0.2585 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 214/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2508 - acc: 0.9178 - val_loss: 0.2583 - val_acc: 0.9213
Learning rate:  1e-05
Epoch 215/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2508 - acc: 0.9153 - val_loss: 0.2584 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 216/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2506 - acc: 0.9206 - val_loss: 0.2587 - val_acc: 0.9213
Learning rate:  1e-05
Epoch 217/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2669 - acc: 0.9073 - val_loss: 0.2565 - val_acc: 0.9203
Learning rate:  1e-05
Epoch 218/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2579 - acc: 0.9146 - val_loss: 0.2573 - val_acc: 0.9213
Learning rate:  1e-05
Epoch 219/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2573 - acc: 0.9155 - val_loss: 0.2556 - val_acc: 0.9203
Learning rate:  1e-05
Epoch 220/300
267/266 [==============================] - 41s 155ms/step - loss: 0.2529 - acc: 0.9143 - val_loss: 0.2579 - val_acc: 0.9203
Learning rate:  1e-05
Epoch 221/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2546 - acc: 0.9127 - val_loss: 0.2596 - val_acc: 0.9185
Learning rate:  1e-05
Epoch 222/300
267/266 [==============================] - 41s 155ms/step - loss: 0.2544 - acc: 0.9132 - val_loss: 0.2555 - val_acc: 0.9241
Learning rate:  1e-05
Epoch 223/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2521 - acc: 0.9148 - val_loss: 0.2582 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 224/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2437 - acc: 0.9216 - val_loss: 0.2580 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 225/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2530 - acc: 0.9146 - val_loss: 0.2564 - val_acc: 0.9222
Learning rate:  1e-05
Epoch 226/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2493 - acc: 0.9153 - val_loss: 0.2576 - val_acc: 0.9203
Learning rate:  1e-05
Epoch 227/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2490 - acc: 0.9183 - val_loss: 0.2577 - val_acc: 0.9203
Learning rate:  1e-05
Epoch 228/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2566 - acc: 0.9143 - val_loss: 0.2581 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 229/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2580 - acc: 0.9108 - val_loss: 0.2563 - val_acc: 0.9166
Learning rate:  1e-05
Epoch 230/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2584 - acc: 0.9176 - val_loss: 0.2533 - val_acc: 0.9222
Learning rate:  1e-05
Epoch 231/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2536 - acc: 0.9125 - val_loss: 0.2553 - val_acc: 0.9222
Learning rate:  1e-05
Epoch 232/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2460 - acc: 0.9197 - val_loss: 0.2585 - val_acc: 0.9175
Learning rate:  1e-05
Epoch 233/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2571 - acc: 0.9113 - val_loss: 0.2590 - val_acc: 0.9213
Learning rate:  1e-05
Epoch 234/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2582 - acc: 0.9141 - val_loss: 0.2599 - val_acc: 0.9194
Learning rate:  1e-05
Epoch 235/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2503 - acc: 0.9162 - val_loss: 0.2558 - val_acc: 0.9213
Learning rate:  1e-05
Epoch 236/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2519 - acc: 0.9153 - val_loss: 0.2562 - val_acc: 0.9203
Learning rate:  1e-05
Epoch 237/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2522 - acc: 0.9155 - val_loss: 0.2572 - val_acc: 0.9203
Learning rate:  1e-05
Epoch 238/300
267/266 [==============================] - 43s 159ms/step - loss: 0.2469 - acc: 0.9195 - val_loss: 0.2563 - val_acc: 0.9213
Learning rate:  1e-05
Epoch 239/300
267/266 [==============================] - 43s 162ms/step - loss: 0.2452 - acc: 0.9251 - val_loss: 0.2594 - val_acc: 0.9213
Learning rate:  1e-05
Epoch 240/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2537 - acc: 0.9150 - val_loss: 0.2574 - val_acc: 0.9213
Learning rate:  1e-05
Epoch 241/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2422 - acc: 0.9223 - val_loss: 0.2594 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 242/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2496 - acc: 0.9181 - val_loss: 0.2600 - val_acc: 0.9203
Learning rate:  1e-06
Epoch 243/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2483 - acc: 0.9213 - val_loss: 0.2608 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 244/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2537 - acc: 0.9136 - val_loss: 0.2608 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 245/300
267/266 [==============================] - 42s 159ms/step - loss: 0.2518 - acc: 0.9190 - val_loss: 0.2595 - val_acc: 0.9203
Learning rate:  1e-06
Epoch 246/300
267/266 [==============================] - 43s 161ms/step - loss: 0.2528 - acc: 0.9157 - val_loss: 0.2598 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 247/300
267/266 [==============================] - 43s 159ms/step - loss: 0.2493 - acc: 0.9164 - val_loss: 0.2588 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 248/300
267/266 [==============================] - 42s 155ms/step - loss: 0.2379 - acc: 0.9185 - val_loss: 0.2605 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 249/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2540 - acc: 0.9176 - val_loss: 0.2598 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 250/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2620 - acc: 0.9127 - val_loss: 0.2594 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 251/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2548 - acc: 0.9127 - val_loss: 0.2581 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 252/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2482 - acc: 0.9164 - val_loss: 0.2597 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 253/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2428 - acc: 0.9209 - val_loss: 0.2581 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 254/300
267/266 [==============================] - 43s 159ms/step - loss: 0.2511 - acc: 0.9155 - val_loss: 0.2592 - val_acc: 0.9203
Learning rate:  1e-06
Epoch 255/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2498 - acc: 0.9150 - val_loss: 0.2593 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 256/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2468 - acc: 0.9218 - val_loss: 0.2584 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 257/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2492 - acc: 0.9223 - val_loss: 0.2586 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 258/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2518 - acc: 0.9169 - val_loss: 0.2592 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 259/300
267/266 [==============================] - 43s 162ms/step - loss: 0.2494 - acc: 0.9169 - val_loss: 0.2589 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 260/300
267/266 [==============================] - 42s 155ms/step - loss: 0.2569 - acc: 0.9153 - val_loss: 0.2589 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 261/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2388 - acc: 0.9204 - val_loss: 0.2591 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 262/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2464 - acc: 0.9232 - val_loss: 0.2581 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 263/300
267/266 [==============================] - 42s 158ms/step - loss: 0.2532 - acc: 0.9164 - val_loss: 0.2583 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 264/300
267/266 [==============================] - 43s 160ms/step - loss: 0.2424 - acc: 0.9202 - val_loss: 0.2578 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 265/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2466 - acc: 0.9197 - val_loss: 0.2581 - val_acc: 0.9194
Learning rate:  1e-06
Epoch 266/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2459 - acc: 0.9169 - val_loss: 0.2580 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 267/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2560 - acc: 0.9127 - val_loss: 0.2588 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 268/300
267/266 [==============================] - 42s 158ms/step - loss: 0.2401 - acc: 0.9190 - val_loss: 0.2585 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 269/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2616 - acc: 0.9181 - val_loss: 0.2590 - val_acc: 0.9213
Learning rate:  1e-06
Epoch 270/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2559 - acc: 0.9169 - val_loss: 0.2569 - val_acc: 0.9222
Learning rate:  1e-06
Epoch 271/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2497 - acc: 0.9136 - val_loss: 0.2582 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 272/300
267/266 [==============================] - 43s 161ms/step - loss: 0.2486 - acc: 0.9160 - val_loss: 0.2583 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 273/300
267/266 [==============================] - 42s 158ms/step - loss: 0.2427 - acc: 0.9169 - val_loss: 0.2578 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 274/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2515 - acc: 0.9143 - val_loss: 0.2590 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 275/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2387 - acc: 0.9185 - val_loss: 0.2580 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 276/300
267/266 [==============================] - 42s 155ms/step - loss: 0.2397 - acc: 0.9218 - val_loss: 0.2578 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 277/300
267/266 [==============================] - 42s 158ms/step - loss: 0.2512 - acc: 0.9148 - val_loss: 0.2582 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 278/300
267/266 [==============================] - 42s 159ms/step - loss: 0.2496 - acc: 0.9188 - val_loss: 0.2583 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 279/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2385 - acc: 0.9209 - val_loss: 0.2583 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 280/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2446 - acc: 0.9230 - val_loss: 0.2578 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 281/300
267/266 [==============================] - 43s 160ms/step - loss: 0.2526 - acc: 0.9183 - val_loss: 0.2564 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 282/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2560 - acc: 0.9167 - val_loss: 0.2578 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 283/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2528 - acc: 0.9141 - val_loss: 0.2586 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 284/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2522 - acc: 0.9160 - val_loss: 0.2570 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 285/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2560 - acc: 0.9143 - val_loss: 0.2585 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 286/300
267/266 [==============================] - 41s 155ms/step - loss: 0.2497 - acc: 0.9139 - val_loss: 0.2566 - val_acc: 0.9231
Learning rate:  5e-07
Epoch 287/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2563 - acc: 0.9164 - val_loss: 0.2579 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 288/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2393 - acc: 0.9192 - val_loss: 0.2584 - val_acc: 0.9222
Learning rate:  5e-07
Epoch 289/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2514 - acc: 0.9157 - val_loss: 0.2571 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 290/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2474 - acc: 0.9183 - val_loss: 0.2577 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 291/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2470 - acc: 0.9160 - val_loss: 0.2576 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 292/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2514 - acc: 0.9204 - val_loss: 0.2585 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 293/300
267/266 [==============================] - 42s 159ms/step - loss: 0.2573 - acc: 0.9139 - val_loss: 0.2574 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 294/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2528 - acc: 0.9169 - val_loss: 0.2577 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 295/300
267/266 [==============================] - 41s 154ms/step - loss: 0.2578 - acc: 0.9150 - val_loss: 0.2576 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 296/300
267/266 [==============================] - 42s 158ms/step - loss: 0.2593 - acc: 0.9160 - val_loss: 0.2578 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 297/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2566 - acc: 0.9150 - val_loss: 0.2578 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 298/300
267/266 [==============================] - 42s 157ms/step - loss: 0.2513 - acc: 0.9183 - val_loss: 0.2581 - val_acc: 0.9213
Learning rate:  5e-07
Epoch 299/300
267/266 [==============================] - 42s 156ms/step - loss: 0.2511 - acc: 0.9139 - val_loss: 0.2577 - val_acc: 0.9203
Learning rate:  5e-07
Epoch 300/300
267/266 [==============================] - 42s 158ms/step - loss: 0.2456 - acc: 0.9183 - val_loss: 0.2589 - val_acc: 0.9203
Out[12]:
<tensorflow.python.keras.callbacks.History at 0x2ed347ef668>
In [13]:
## Training is complete. Restore the best checkpoint into a second model
# object ("modelGo") and compile it so it can be used for final evaluation.
# `filepath` is the ModelCheckpoint save path from the training cell;
# compiling is required before evaluate()/metrics work on the loaded weights.
# NOTE(review): loss/metrics here should match those used during training —
# 'categorical_crossentropy' + accuracy look consistent with the acc/val_acc logs.
modelGo.load_weights(filepath)
modelGo.compile(loss='categorical_crossentropy', 
                optimizer='adam', 
                metrics=['accuracy'])
In [14]:
# Run the restored model over the held-out test images.
predicts = modelGo.predict(tsDat)

# Collapse the softmax outputs (and the one-hot test labels) down to
# integer class ids so sklearn's metrics can consume them.
predout = predicts.argmax(axis=1)
testout = np.argmax(tsLbl, axis=1)

# Human-readable class names for the classification report.
labelname = ['non-flower', 'flower']

# Overall accuracy plus the full confusion matrix on the test set.
testScores = metrics.accuracy_score(testout, predout)
confusion = metrics.confusion_matrix(testout, predout)

print("Best accuracy (on testing dataset): %.2f%%" % (testScores*100))
print(metrics.classification_report(testout,predout,target_names=labelname,digits=4))
print(confusion)
Best accuracy (on testing dataset): 92.41%
              precision    recall  f1-score   support

  non-flower     0.8960    0.9295    0.9124       454
      flower     0.9463    0.9201    0.9330       613

    accuracy                         0.9241      1067
   macro avg     0.9211    0.9248    0.9227      1067
weighted avg     0.9249    0.9241    0.9243      1067

[[422  32]
 [ 49 564]]
In [15]:
import pandas as pd

# Load the per-epoch training history written by the CSVLogger callback
# (one row per epoch: loss, acc, val_loss, val_acc).
records     = pd.read_csv(modelname +'.csv')
plt.figure()
# Top panel: training vs. validation loss across epochs.
plt.subplot(211)
plt.plot(records['val_loss'])
plt.plot(records['loss'])
plt.yticks([0, 0.20, 0.30, 0.4, 0.5])
plt.title('Loss value',fontsize=12)

# Hide the top panel's x tick labels; the bottom panel shares the epoch axis.
ax          = plt.gca()
ax.set_xticklabels([])



# Bottom panel: training vs. validation accuracy across epochs.
plt.subplot(212)
plt.plot(records['val_acc'])
plt.plot(records['acc'])
plt.yticks([0.7, 0.8, 0.9, 1.0])
plt.title('Accuracy',fontsize=12)
plt.show()
In [16]:
# Collect every test-set position where the predicted class id
# disagrees with the ground-truth class id.
wrong_ans_index = [i for i in range(len(predout)) if predout[i] != testout[i]]
In [17]:
wrong_ans_index = list(set(wrong_ans_index))
In [18]:
# Show every misclassified test image together with its predicted
# and actual class ids (0 = non-flower, 1 = flower per `labelname`).

dataset = tsDatOrg   # original (un-normalized) test images, indexable by test position

for index in wrong_ans_index:
    print("Showing %s index image" %(index))
    print("Predicted as %s but is actually %s" %(predout[index], testout[index]))
    # Fixed: previously indexed an undefined `data` variable that only
    # existed via stale kernel state; `dataset` is what this cell assigns.
    imgplot = plt.imshow(dataset[index])
    plt.show()
Showing 384 index image
Predicted as 0 but is actually 1
Showing 3 index image
Predicted as 0 but is actually 1
Showing 131 index image
Predicted as 0 but is actually 1
Showing 133 index image
Predicted as 0 but is actually 1
Showing 645 index image
Predicted as 1 but is actually 0
Showing 775 index image
Predicted as 1 but is actually 0
Showing 904 index image
Predicted as 1 but is actually 0
Showing 137 index image
Predicted as 0 but is actually 1
Showing 905 index image
Predicted as 1 but is actually 0
Showing 524 index image
Predicted as 0 but is actually 1
Showing 269 index image
Predicted as 0 but is actually 1
Showing 398 index image
Predicted as 0 but is actually 1
Showing 893 index image
Predicted as 1 but is actually 0
Showing 909 index image
Predicted as 1 but is actually 0
Showing 914 index image
Predicted as 1 but is actually 0
Showing 147 index image
Predicted as 0 but is actually 1
Showing 148 index image
Predicted as 0 but is actually 1
Showing 915 index image
Predicted as 1 but is actually 0
Showing 790 index image
Predicted as 1 but is actually 0
Showing 279 index image
Predicted as 0 but is actually 1
Showing 791 index image
Predicted as 1 but is actually 0
Showing 792 index image
Predicted as 1 but is actually 0
Showing 919 index image
Predicted as 1 but is actually 0
Showing 539 index image
Predicted as 0 but is actually 1
Showing 540 index image
Predicted as 0 but is actually 1
Showing 921 index image
Predicted as 1 but is actually 0
Showing 1022 index image
Predicted as 1 but is actually 0
Showing 1054 index image
Predicted as 1 but is actually 0
Showing 288 index image
Predicted as 0 but is actually 1
Showing 1056 index image
Predicted as 1 but is actually 0
Showing 675 index image
Predicted as 1 but is actually 0
Showing 420 index image
Predicted as 0 but is actually 1
Showing 805 index image
Predicted as 1 but is actually 0
Showing 551 index image
Predicted as 0 but is actually 1
Showing 169 index image
Predicted as 0 but is actually 1
Showing 556 index image
Predicted as 0 but is actually 1
Showing 942 index image
Predicted as 1 but is actually 0
Showing 175 index image
Predicted as 0 but is actually 1
Showing 176 index image
Predicted as 0 but is actually 1
Showing 50 index image
Predicted as 0 but is actually 1
Showing 311 index image
Predicted as 0 but is actually 1
Showing 695 index image
Predicted as 1 but is actually 0
Showing 955 index image
Predicted as 1 but is actually 0
Showing 830 index image
Predicted as 1 but is actually 0
Showing 958 index image
Predicted as 1 but is actually 0
Showing 964 index image
Predicted as 1 but is actually 0
Showing 583 index image
Predicted as 0 but is actually 1
Showing 72 index image
Predicted as 0 but is actually 1
Showing 329 index image
Predicted as 0 but is actually 1
Showing 712 index image
Predicted as 1 but is actually 0
Showing 75 index image
Predicted as 0 but is actually 1
Showing 204 index image
Predicted as 0 but is actually 1
Showing 205 index image
Predicted as 0 but is actually 1
Showing 974 index image
Predicted as 1 but is actually 0
Showing 336 index image
Predicted as 0 but is actually 1
Showing 596 index image
Predicted as 0 but is actually 1
Showing 469 index image
Predicted as 0 but is actually 1
Showing 982 index image
Predicted as 1 but is actually 0
Showing 471 index image
Predicted as 0 but is actually 1
Showing 474 index image
Predicted as 0 but is actually 1
Showing 347 index image
Predicted as 0 but is actually 1
Showing 220 index image
Predicted as 0 but is actually 1
Showing 92 index image
Predicted as 0 but is actually 1
Showing 605 index image
Predicted as 0 but is actually 1
Showing 733 index image
Predicted as 1 but is actually 0
Showing 228 index image
Predicted as 0 but is actually 1
Showing 487 index image
Predicted as 0 but is actually 1
Showing 615 index image
Predicted as 1 but is actually 0
Showing 233 index image
Predicted as 0 but is actually 1
Showing 616 index image
Predicted as 1 but is actually 0
Showing 491 index image
Predicted as 0 but is actually 1
Showing 743 index image
Predicted as 1 but is actually 0
Showing 365 index image
Predicted as 0 but is actually 1
Showing 367 index image
Predicted as 0 but is actually 1
Showing 752 index image
Predicted as 1 but is actually 0
Showing 113 index image
Predicted as 0 but is actually 1
Showing 498 index image
Predicted as 0 but is actually 1
Showing 375 index image
Predicted as 0 but is actually 1
Showing 505 index image
Predicted as 0 but is actually 1
Showing 253 index image
Predicted as 0 but is actually 1
Showing 382 index image
Predicted as 0 but is actually 1
In [19]:
# Stacking 3 NNs?